In [1]:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#   LEARN FCN01 from FCN02
#

from __future__ import print_function
import argparse
import os

import numpy as np
import pickle
from keras import backend as K
from keras.callbacks import ModelCheckpoint
from keras.models import Model
from keras.layers import Input
from keras.layers import Conv2D, MaxPooling2D, UpSampling2D, Concatenate
from keras.layers import merge
from keras.optimizers import Adam, SGD, RMSprop
from keras.preprocessing.image import list_pictures, array_to_img

from image_ext import list_pictures_in_multidir, load_imgs_asarray, img_dice_coeff
from create_fcn import create_fcn01, create_fcn02, create_pupil_net

np.random.seed(2016)
/home/nakazawa_atsushi/anaconda3/envs/py3/lib/python3.6/site-packages/h5py/__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.
  from ._conv import register_converters as _register_converters
Using TensorFlow backend.
/home/nakazawa_atsushi/anaconda3/envs/py3/lib/python3.6/importlib/_bootstrap.py:219: RuntimeWarning: compiletime version 3.5 of module 'tensorflow.python.framework.fast_tensor_util' does not match runtime version 3.6
  return f(*args, **kwds)
In [2]:
def dice_coef(y_true, y_pred):
    """Differentiable Dice coefficient between two tensors (smoothing term 1)."""
    yt = K.flatten(y_true)
    yp = K.flatten(y_pred)
    overlap = K.sum(yt * yp)
    # +1 in numerator and denominator smooths the ratio and avoids 0/0.
    return (2. * overlap + 1) / (K.sum(yt) + K.sum(yp) + 1)

def dice_coef_loss(y_true, y_pred):
    """Loss to minimize: the negated Dice coefficient (higher overlap -> lower loss)."""
    return -1 * dice_coef(y_true, y_pred)
In [3]:
def load_fnames(paths):
    """Read file names, one per line, from a text file.

    Args:
        paths: path to a text file listing one file name per line.

    Returns:
        list of str: the lines of the file without newline characters.

    Notes:
        splitlines() never yields a spurious empty final element for a file
        that ends with a newline, and — unlike the old split('\n') followed
        by deleting the last element — it keeps the final line even when the
        file does not end with a newline.
    """
    # Context manager guarantees the handle is closed even if read() raises.
    with open(paths) as f:
        return f.read().splitlines()
In [4]:
def make_fnames(fnames, fpath, fpath_mask, mask_ext):
    """Build parallel lists of image paths and mask paths.

    Args:
        fnames: bare file names.
        fpath: directory holding the input images.
        fpath_mask: directory holding the mask images.
        mask_ext: prefix prepended to each name to form the mask file name.

    Returns:
        [image_paths, mask_paths] — two lists aligned index-by-index.
    """
    fnames_img = [fpath + '/' + name for name in fnames]
    fnames_mask = [fpath_mask + '/' + mask_ext + name for name in fnames]
    return [fnames_img, fnames_mask]
In [5]:
#
#  MAIN STARTS FROM HERE
#
if __name__ == '__main__':

    # Configuration: input size, output directories/file-name templates.
    target_size = (224, 224)
    dpath_this = './'
    dname_checkpoints = 'checkpoints_fcn01'
    dname_checkpoints_fcn02 = 'checkpoints_fcn02'
    dname_outputs = 'outputs'
    fname_architecture = 'architecture.json'
    fname_weights = "model_weights_{epoch:02d}.h5"
    fname_stats = 'stats01.npz'
    dim_ordering = 'channels_first'
    fname_history = "history.pkl"

    # Definition of mode: LEARN or TEST or SHOW_HISTORY
    mode = "LEARN"
    #mode = "SHOW_HISTORY"
    #mode = "TEST"

    # Build both models; fcn01 will receive fcn02's trained layer weights later.
    print('creating model fcn01 and fcn02...')
    model_fcn02 = create_fcn02(target_size)
    model_fcn01 = create_fcn01(target_size)

    # Idiomatic boolean check (was `== 0` on the bool returned by exists()).
    if not os.path.exists(dname_checkpoints):
        os.mkdir(dname_checkpoints)
creating model fcn01 and fcn02...
In [6]:
#
#   LEARNING MODE
#
if mode == "LEARN":
    # Read Learning Data: file-name list -> parallel image/mask path lists.
    fnames = load_fnames('data/list_train_01.txt')
    [fpaths_xs_train,fpaths_ys_train] = make_fnames(fnames,'data/img','data/mask','OperatorA_')

    X_train = load_imgs_asarray(fpaths_xs_train, grayscale=False, target_size=target_size,
                                dim_ordering=dim_ordering)
    Y_train = load_imgs_asarray(fpaths_ys_train, grayscale=True, target_size=target_size,
                                dim_ordering=dim_ordering) 

    # Read Validation Data the same way, from a separate list file.
    fnames = load_fnames('data/list_valid_01.txt')
    [fpaths_xs_valid,fpaths_ys_valid] = make_fnames(fnames,'data/img','data/mask','OperatorA_')

    X_valid = load_imgs_asarray(fpaths_xs_valid, grayscale=False, target_size=target_size,
                                dim_ordering=dim_ordering)
    Y_valid = load_imgs_asarray(fpaths_ys_valid, grayscale=True, target_size=target_size,
                                dim_ordering=dim_ordering)     

    print('==> ' + str(len(X_train)) + ' training images loaded')
    print('==> ' + str(len(Y_train)) + ' training masks loaded')
    print('==> ' + str(len(X_valid)) + ' validation images loaded')
    print('==> ' + str(len(Y_valid)) + ' validation masks loaded')

    # Preprocessing: per-channel mean/std over the training set.
    # Axes (0, 2, 3) leave the channel axis, consistent with
    # dim_ordering = 'channels_first' set above.
    print('computing mean and standard deviation...')
    mean = np.mean(X_train, axis=(0, 2, 3))
    std = np.std(X_train, axis=(0, 2, 3))
    print('==> mean: ' + str(mean))
    print('==> std : ' + str(std))

    # Persist the stats so inference can apply the same normalization.
    print('saving mean and standard deviation to ' + fname_stats + '...')
    stats = {'mean': mean, 'std': std}
    np.savez(dname_checkpoints + '/' + fname_stats, **stats)
    print('==> done')

    print('globally normalizing data...')
    for i in range(3):
        X_train[:, i] = (X_train[:, i] - mean[i]) / std[i]
        X_valid[:, i] = (X_valid[:, i] - mean[i]) / std[i]
    # Scale masks from [0, 255] to [0, 1] to match the network's output range.
    Y_train /= 255
    Y_valid /= 255
    print('==> done')
==> 1452 training images loaded
==> 1452 training masks loaded
==> 527 validation images loaded
==> 527 validation masks loaded
computing mean and standard deviation...
==> mean: [130.65465  91.2685   76.63643]
==> std : [55.2817   43.990963 43.113483]
saving mean and standard deviation to stats01.npz...
==> done
globally normalizing data...
==> done
In [ ]:
    # Load the trained fcn02 weights (saved at the given epoch) into model_fcn02.
    epoch = 300
    fname_weights = 'model_weights_%02d.h5'%(epoch)
    fpath_weights_fcn02 = os.path.join(dname_checkpoints_fcn02, fname_weights)
    model_fcn02.load_weights(fpath_weights_fcn02)
    print('==> done')

    # load weights from Learned U-NET
    layer_names = ['conv1_1','conv1_2','conv2_1','conv2_2','conv3_1','conv3_2','conv4_1','conv4_2',
                'up1_1', 'up1_2', 'up2_1', 'up2_2', 'up3_1', 'up3_2', 'conv_fin']
    
    print('copying layer weights')
    for name in layer_names:
        print(name)
        # Copy fcn02's weights into the same-named fcn01 layer, then freeze it
        # so those layers are not updated during fcn01's training.
        model_fcn01.get_layer(name).set_weights(model_fcn02.get_layer(name).get_weights())
        model_fcn01.get_layer(name).trainable = False
    
In [7]:
    # Define loss function and optimizer; training maximizes the Dice coefficient.
    adam = Adam(lr=1e-5)
    sgd = SGD(lr=0.01, decay=1e-6, momentum=0.1, nesterov=True)
    #rmsprop = RMSprop(lr=0.001, rho=0.9, epsilon=1e-08, decay=0.0)
    model_fcn01.compile(optimizer=adam, loss=dice_coef_loss, metrics=[dice_coef])

    # Make sure the directory for saving architecture/weights exists.
    dpath_checkpoints = os.path.join(dpath_this, dname_checkpoints)
    if not os.path.isdir(dpath_checkpoints):
        os.mkdir(dpath_checkpoints)

    # Callback that saves the weights after every epoch (not only the best one).
    fname_weights = "model_weights_{epoch:02d}.h5"
    fpath_weights = os.path.join(dpath_checkpoints, fname_weights)
    checkpointer = ModelCheckpoint(filepath=fpath_weights, save_best_only=False)      
In [9]:
    # Start training; per-epoch weights are written by the checkpointer callback.
    print('start training...')
    history = model_fcn01.fit(X_train, Y_train, batch_size=64, epochs=200, verbose=1,
                  shuffle=True, validation_data=(X_valid, Y_valid), callbacks=[checkpointer])
start training...
Train on 1452 samples, validate on 527 samples
Epoch 1/200
1452/1452 [==============================] - 78s 54ms/step - loss: -0.0918 - dice_coef: 0.0918 - val_loss: -0.1154 - val_dice_coef: 0.1154
Epoch 2/200
1452/1452 [==============================] - 55s 38ms/step - loss: -0.1646 - dice_coef: 0.1646 - val_loss: -0.2956 - val_dice_coef: 0.2956
Epoch 3/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.3932 - dice_coef: 0.3932 - val_loss: -0.5426 - val_dice_coef: 0.5426
Epoch 4/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.5119 - dice_coef: 0.5119 - val_loss: -0.6281 - val_dice_coef: 0.6281
Epoch 5/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.6159 - dice_coef: 0.6159 - val_loss: -0.6683 - val_dice_coef: 0.6683
Epoch 6/200
1452/1452 [==============================] - 55s 38ms/step - loss: -0.6460 - dice_coef: 0.6460 - val_loss: -0.4760 - val_dice_coef: 0.4760
Epoch 7/200
1452/1452 [==============================] - 54s 38ms/step - loss: -0.6263 - dice_coef: 0.6263 - val_loss: -0.6670 - val_dice_coef: 0.6670
Epoch 8/200
1452/1452 [==============================] - 55s 38ms/step - loss: -0.6795 - dice_coef: 0.6795 - val_loss: -0.7069 - val_dice_coef: 0.7069
Epoch 9/200
1452/1452 [==============================] - 55s 38ms/step - loss: -0.6974 - dice_coef: 0.6974 - val_loss: -0.7036 - val_dice_coef: 0.7036
Epoch 10/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7057 - dice_coef: 0.7057 - val_loss: -0.7169 - val_dice_coef: 0.7169
Epoch 11/200
1452/1452 [==============================] - 55s 38ms/step - loss: -0.7088 - dice_coef: 0.7088 - val_loss: -0.7102 - val_dice_coef: 0.7102
Epoch 12/200
1452/1452 [==============================] - 55s 38ms/step - loss: -0.7109 - dice_coef: 0.7109 - val_loss: -0.6837 - val_dice_coef: 0.6837
Epoch 13/200
1452/1452 [==============================] - 55s 38ms/step - loss: -0.7176 - dice_coef: 0.7176 - val_loss: -0.7219 - val_dice_coef: 0.7219
Epoch 14/200
1452/1452 [==============================] - 55s 38ms/step - loss: -0.7175 - dice_coef: 0.7175 - val_loss: -0.6620 - val_dice_coef: 0.6620
Epoch 15/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7104 - dice_coef: 0.7104 - val_loss: -0.7325 - val_dice_coef: 0.7325
Epoch 16/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7321 - dice_coef: 0.7321 - val_loss: -0.7403 - val_dice_coef: 0.7403
Epoch 17/200
1452/1452 [==============================] - 55s 38ms/step - loss: -0.7301 - dice_coef: 0.7301 - val_loss: -0.7366 - val_dice_coef: 0.7366
Epoch 18/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7393 - dice_coef: 0.7393 - val_loss: -0.7472 - val_dice_coef: 0.7472
Epoch 19/200
1452/1452 [==============================] - 55s 38ms/step - loss: -0.7423 - dice_coef: 0.7423 - val_loss: -0.7170 - val_dice_coef: 0.7170
Epoch 20/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7454 - dice_coef: 0.7454 - val_loss: -0.7432 - val_dice_coef: 0.7432
Epoch 21/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7394 - dice_coef: 0.7394 - val_loss: -0.7500 - val_dice_coef: 0.7500
Epoch 22/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7453 - dice_coef: 0.7453 - val_loss: -0.7579 - val_dice_coef: 0.7579
Epoch 23/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7610 - dice_coef: 0.7610 - val_loss: -0.7582 - val_dice_coef: 0.7582
Epoch 24/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7524 - dice_coef: 0.7524 - val_loss: -0.7572 - val_dice_coef: 0.7572
Epoch 25/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7614 - dice_coef: 0.7614 - val_loss: -0.7663 - val_dice_coef: 0.7663
Epoch 26/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7612 - dice_coef: 0.7612 - val_loss: -0.7576 - val_dice_coef: 0.7576
Epoch 27/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7598 - dice_coef: 0.7598 - val_loss: -0.7687 - val_dice_coef: 0.7687
Epoch 28/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7699 - dice_coef: 0.7699 - val_loss: -0.7378 - val_dice_coef: 0.7378
Epoch 29/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7699 - dice_coef: 0.7699 - val_loss: -0.7692 - val_dice_coef: 0.7692
Epoch 30/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7755 - dice_coef: 0.7755 - val_loss: -0.7739 - val_dice_coef: 0.7739
Epoch 31/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7566 - dice_coef: 0.7566 - val_loss: -0.7603 - val_dice_coef: 0.7603
Epoch 32/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7446 - dice_coef: 0.7446 - val_loss: -0.7471 - val_dice_coef: 0.7471
Epoch 33/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7628 - dice_coef: 0.7628 - val_loss: -0.7419 - val_dice_coef: 0.7419
Epoch 34/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7711 - dice_coef: 0.7711 - val_loss: -0.7764 - val_dice_coef: 0.7764
Epoch 35/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7718 - dice_coef: 0.7718 - val_loss: -0.7739 - val_dice_coef: 0.7739
Epoch 36/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7855 - dice_coef: 0.7855 - val_loss: -0.7795 - val_dice_coef: 0.7795
Epoch 37/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7653 - dice_coef: 0.7653 - val_loss: -0.7494 - val_dice_coef: 0.7494
Epoch 38/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7664 - dice_coef: 0.7664 - val_loss: -0.7827 - val_dice_coef: 0.7827
Epoch 39/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7771 - dice_coef: 0.7771 - val_loss: -0.7857 - val_dice_coef: 0.7857
Epoch 40/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7831 - dice_coef: 0.7831 - val_loss: -0.7822 - val_dice_coef: 0.7822
Epoch 41/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7707 - dice_coef: 0.7707 - val_loss: -0.7813 - val_dice_coef: 0.7813
Epoch 42/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7843 - dice_coef: 0.7843 - val_loss: -0.7504 - val_dice_coef: 0.7504
Epoch 43/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7797 - dice_coef: 0.7797 - val_loss: -0.7846 - val_dice_coef: 0.7846
Epoch 44/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7945 - dice_coef: 0.7945 - val_loss: -0.7889 - val_dice_coef: 0.7889
Epoch 45/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7975 - dice_coef: 0.7975 - val_loss: -0.7646 - val_dice_coef: 0.7646
Epoch 46/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7941 - dice_coef: 0.7941 - val_loss: -0.7893 - val_dice_coef: 0.7893
Epoch 47/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7923 - dice_coef: 0.7923 - val_loss: -0.7662 - val_dice_coef: 0.7662
Epoch 48/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7968 - dice_coef: 0.7968 - val_loss: -0.7938 - val_dice_coef: 0.7938
Epoch 49/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8031 - dice_coef: 0.8031 - val_loss: -0.7966 - val_dice_coef: 0.7966
Epoch 50/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7903 - dice_coef: 0.7903 - val_loss: -0.7684 - val_dice_coef: 0.7684
Epoch 51/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7970 - dice_coef: 0.7970 - val_loss: -0.7981 - val_dice_coef: 0.7981
Epoch 52/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8000 - dice_coef: 0.8000 - val_loss: -0.7640 - val_dice_coef: 0.7640
Epoch 53/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8027 - dice_coef: 0.8027 - val_loss: -0.7993 - val_dice_coef: 0.7993
Epoch 54/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8099 - dice_coef: 0.8099 - val_loss: -0.8013 - val_dice_coef: 0.8013
Epoch 55/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8112 - dice_coef: 0.8112 - val_loss: -0.7793 - val_dice_coef: 0.7793
Epoch 56/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8044 - dice_coef: 0.8044 - val_loss: -0.7981 - val_dice_coef: 0.7981
Epoch 57/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7960 - dice_coef: 0.7960 - val_loss: -0.7963 - val_dice_coef: 0.7963
Epoch 58/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8022 - dice_coef: 0.8022 - val_loss: -0.8059 - val_dice_coef: 0.8059
Epoch 59/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7987 - dice_coef: 0.7987 - val_loss: -0.8004 - val_dice_coef: 0.8004
Epoch 60/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.7984 - dice_coef: 0.7984 - val_loss: -0.7949 - val_dice_coef: 0.7949
Epoch 61/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8101 - dice_coef: 0.8101 - val_loss: -0.8035 - val_dice_coef: 0.8035
Epoch 62/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8119 - dice_coef: 0.8119 - val_loss: -0.7513 - val_dice_coef: 0.7513
Epoch 63/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8075 - dice_coef: 0.8075 - val_loss: -0.7957 - val_dice_coef: 0.7957
Epoch 64/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8157 - dice_coef: 0.8157 - val_loss: -0.8032 - val_dice_coef: 0.8032
Epoch 65/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8181 - dice_coef: 0.8181 - val_loss: -0.8071 - val_dice_coef: 0.8071
Epoch 66/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8193 - dice_coef: 0.8193 - val_loss: -0.8038 - val_dice_coef: 0.8038
Epoch 67/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8110 - dice_coef: 0.8110 - val_loss: -0.7907 - val_dice_coef: 0.7907
Epoch 68/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8186 - dice_coef: 0.8186 - val_loss: -0.8137 - val_dice_coef: 0.8137
Epoch 69/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8233 - dice_coef: 0.8233 - val_loss: -0.7961 - val_dice_coef: 0.7961
Epoch 70/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8194 - dice_coef: 0.8194 - val_loss: -0.8159 - val_dice_coef: 0.8159
Epoch 71/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8109 - dice_coef: 0.8109 - val_loss: -0.8125 - val_dice_coef: 0.8125
Epoch 72/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8178 - dice_coef: 0.8178 - val_loss: -0.8162 - val_dice_coef: 0.8162
Epoch 73/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8177 - dice_coef: 0.8177 - val_loss: -0.8131 - val_dice_coef: 0.8131
Epoch 74/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8285 - dice_coef: 0.8285 - val_loss: -0.8116 - val_dice_coef: 0.8116
Epoch 75/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8169 - dice_coef: 0.8169 - val_loss: -0.7789 - val_dice_coef: 0.7789
Epoch 76/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8186 - dice_coef: 0.8186 - val_loss: -0.8109 - val_dice_coef: 0.8109
Epoch 77/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8268 - dice_coef: 0.8268 - val_loss: -0.8208 - val_dice_coef: 0.8208
Epoch 78/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8303 - dice_coef: 0.8303 - val_loss: -0.7974 - val_dice_coef: 0.7974
Epoch 79/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8247 - dice_coef: 0.8247 - val_loss: -0.7964 - val_dice_coef: 0.7964
Epoch 80/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8299 - dice_coef: 0.8299 - val_loss: -0.8247 - val_dice_coef: 0.8247
Epoch 81/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8343 - dice_coef: 0.8343 - val_loss: -0.8183 - val_dice_coef: 0.8183
Epoch 82/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8361 - dice_coef: 0.8361 - val_loss: -0.8079 - val_dice_coef: 0.8079
Epoch 83/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8318 - dice_coef: 0.8318 - val_loss: -0.8237 - val_dice_coef: 0.8237
Epoch 84/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8332 - dice_coef: 0.8332 - val_loss: -0.8260 - val_dice_coef: 0.8260
Epoch 85/200
1452/1452 [==============================] - 54s 38ms/step - loss: -0.8376 - dice_coef: 0.8376 - val_loss: -0.8250 - val_dice_coef: 0.8250
Epoch 86/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8373 - dice_coef: 0.8373 - val_loss: -0.8233 - val_dice_coef: 0.8233
Epoch 87/200
1452/1452 [==============================] - 54s 38ms/step - loss: -0.8317 - dice_coef: 0.8317 - val_loss: -0.8052 - val_dice_coef: 0.8052
Epoch 88/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8308 - dice_coef: 0.8308 - val_loss: -0.8181 - val_dice_coef: 0.8181
Epoch 89/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8354 - dice_coef: 0.8354 - val_loss: -0.8202 - val_dice_coef: 0.8202
Epoch 90/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8402 - dice_coef: 0.8402 - val_loss: -0.8182 - val_dice_coef: 0.8182
Epoch 91/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8366 - dice_coef: 0.8366 - val_loss: -0.8256 - val_dice_coef: 0.8256
Epoch 92/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8386 - dice_coef: 0.8386 - val_loss: -0.8159 - val_dice_coef: 0.8159
Epoch 93/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8370 - dice_coef: 0.8370 - val_loss: -0.8120 - val_dice_coef: 0.8120
Epoch 94/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8377 - dice_coef: 0.8377 - val_loss: -0.8284 - val_dice_coef: 0.8284
Epoch 95/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8430 - dice_coef: 0.8430 - val_loss: -0.8305 - val_dice_coef: 0.8305
Epoch 96/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8444 - dice_coef: 0.8444 - val_loss: -0.8308 - val_dice_coef: 0.8308
Epoch 97/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8421 - dice_coef: 0.8421 - val_loss: -0.8294 - val_dice_coef: 0.8294
Epoch 98/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8409 - dice_coef: 0.8409 - val_loss: -0.8335 - val_dice_coef: 0.8335
Epoch 99/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8331 - dice_coef: 0.8331 - val_loss: -0.8037 - val_dice_coef: 0.8037
Epoch 100/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8368 - dice_coef: 0.8368 - val_loss: -0.8265 - val_dice_coef: 0.8265
Epoch 101/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8343 - dice_coef: 0.8343 - val_loss: -0.8199 - val_dice_coef: 0.8199
Epoch 102/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8342 - dice_coef: 0.8342 - val_loss: -0.8302 - val_dice_coef: 0.8302
Epoch 103/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8423 - dice_coef: 0.8423 - val_loss: -0.8364 - val_dice_coef: 0.8364
Epoch 104/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8466 - dice_coef: 0.8466 - val_loss: -0.8295 - val_dice_coef: 0.8295
Epoch 105/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8481 - dice_coef: 0.8481 - val_loss: -0.8365 - val_dice_coef: 0.8365
Epoch 106/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8469 - dice_coef: 0.8469 - val_loss: -0.8379 - val_dice_coef: 0.8379
Epoch 107/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8495 - dice_coef: 0.8495 - val_loss: -0.7988 - val_dice_coef: 0.7988
Epoch 108/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8435 - dice_coef: 0.8435 - val_loss: -0.8289 - val_dice_coef: 0.8289
Epoch 109/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8488 - dice_coef: 0.8488 - val_loss: -0.8389 - val_dice_coef: 0.8389
Epoch 110/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8445 - dice_coef: 0.8445 - val_loss: -0.8338 - val_dice_coef: 0.8338
Epoch 111/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8510 - dice_coef: 0.8510 - val_loss: -0.8390 - val_dice_coef: 0.8390
Epoch 112/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8533 - dice_coef: 0.8533 - val_loss: -0.8404 - val_dice_coef: 0.8404
Epoch 113/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8504 - dice_coef: 0.8504 - val_loss: -0.8370 - val_dice_coef: 0.8370
Epoch 114/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8521 - dice_coef: 0.8521 - val_loss: -0.8414 - val_dice_coef: 0.8414
Epoch 115/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8534 - dice_coef: 0.8534 - val_loss: -0.8383 - val_dice_coef: 0.8383
Epoch 116/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8526 - dice_coef: 0.8526 - val_loss: -0.8244 - val_dice_coef: 0.8244
Epoch 117/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8508 - dice_coef: 0.8508 - val_loss: -0.8403 - val_dice_coef: 0.8403
Epoch 118/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8517 - dice_coef: 0.8517 - val_loss: -0.8211 - val_dice_coef: 0.8211
Epoch 119/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8524 - dice_coef: 0.8524 - val_loss: -0.8398 - val_dice_coef: 0.8398
Epoch 120/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8514 - dice_coef: 0.8514 - val_loss: -0.8394 - val_dice_coef: 0.8394
Epoch 121/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8446 - dice_coef: 0.8446 - val_loss: -0.8372 - val_dice_coef: 0.8372
Epoch 122/200
1452/1452 [==============================] - 55s 38ms/step - loss: -0.8457 - dice_coef: 0.8457 - val_loss: -0.8428 - val_dice_coef: 0.8428
Epoch 123/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8560 - dice_coef: 0.8560 - val_loss: -0.8400 - val_dice_coef: 0.8400
Epoch 124/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8565 - dice_coef: 0.8565 - val_loss: -0.8283 - val_dice_coef: 0.8283
Epoch 125/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8536 - dice_coef: 0.8536 - val_loss: -0.8384 - val_dice_coef: 0.8384
Epoch 126/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8581 - dice_coef: 0.8581 - val_loss: -0.8232 - val_dice_coef: 0.8232
Epoch 127/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8518 - dice_coef: 0.8518 - val_loss: -0.8425 - val_dice_coef: 0.8425
Epoch 128/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8588 - dice_coef: 0.8588 - val_loss: -0.8447 - val_dice_coef: 0.8447
Epoch 129/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8585 - dice_coef: 0.8585 - val_loss: -0.8452 - val_dice_coef: 0.8452
Epoch 130/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8600 - dice_coef: 0.8600 - val_loss: -0.8468 - val_dice_coef: 0.8468
Epoch 131/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8573 - dice_coef: 0.8573 - val_loss: -0.8293 - val_dice_coef: 0.8293
Epoch 132/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8565 - dice_coef: 0.8565 - val_loss: -0.8464 - val_dice_coef: 0.8464
Epoch 133/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8567 - dice_coef: 0.8567 - val_loss: -0.8392 - val_dice_coef: 0.8392
Epoch 134/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8560 - dice_coef: 0.8560 - val_loss: -0.8251 - val_dice_coef: 0.8251
Epoch 135/200
1452/1452 [==============================] - 54s 38ms/step - loss: -0.8541 - dice_coef: 0.8541 - val_loss: -0.8401 - val_dice_coef: 0.8401
Epoch 136/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8573 - dice_coef: 0.8573 - val_loss: -0.8470 - val_dice_coef: 0.8470
Epoch 137/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8611 - dice_coef: 0.8611 - val_loss: -0.8474 - val_dice_coef: 0.8474
Epoch 138/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8632 - dice_coef: 0.8632 - val_loss: -0.8429 - val_dice_coef: 0.8429
Epoch 139/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8601 - dice_coef: 0.8601 - val_loss: -0.8407 - val_dice_coef: 0.8407
Epoch 140/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8591 - dice_coef: 0.8591 - val_loss: -0.8441 - val_dice_coef: 0.8441
Epoch 141/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8632 - dice_coef: 0.8632 - val_loss: -0.8470 - val_dice_coef: 0.8470
Epoch 142/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8642 - dice_coef: 0.8642 - val_loss: -0.8442 - val_dice_coef: 0.8442
Epoch 143/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8590 - dice_coef: 0.8590 - val_loss: -0.8473 - val_dice_coef: 0.8473
Epoch 144/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8602 - dice_coef: 0.8602 - val_loss: -0.8465 - val_dice_coef: 0.8465
Epoch 145/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8627 - dice_coef: 0.8627 - val_loss: -0.8459 - val_dice_coef: 0.8459
Epoch 146/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8607 - dice_coef: 0.8607 - val_loss: -0.8272 - val_dice_coef: 0.8272
Epoch 147/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8577 - dice_coef: 0.8577 - val_loss: -0.8468 - val_dice_coef: 0.8468
Epoch 148/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8641 - dice_coef: 0.8641 - val_loss: -0.8479 - val_dice_coef: 0.8479
Epoch 149/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8647 - dice_coef: 0.8647 - val_loss: -0.8425 - val_dice_coef: 0.8425
Epoch 150/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8628 - dice_coef: 0.8628 - val_loss: -0.8483 - val_dice_coef: 0.8483
Epoch 151/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8626 - dice_coef: 0.8626 - val_loss: -0.8346 - val_dice_coef: 0.8346
Epoch 152/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8598 - dice_coef: 0.8598 - val_loss: -0.8492 - val_dice_coef: 0.8492
Epoch 153/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8636 - dice_coef: 0.8636 - val_loss: -0.8486 - val_dice_coef: 0.8486
Epoch 154/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8518 - dice_coef: 0.8518 - val_loss: -0.8464 - val_dice_coef: 0.8464
Epoch 155/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8640 - dice_coef: 0.8640 - val_loss: -0.8412 - val_dice_coef: 0.8412
Epoch 156/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8631 - dice_coef: 0.8631 - val_loss: -0.8491 - val_dice_coef: 0.8491
Epoch 157/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8668 - dice_coef: 0.8668 - val_loss: -0.8492 - val_dice_coef: 0.8492
Epoch 158/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8638 - dice_coef: 0.8638 - val_loss: -0.8515 - val_dice_coef: 0.8515
Epoch 159/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8666 - dice_coef: 0.8666 - val_loss: -0.8503 - val_dice_coef: 0.8503
Epoch 160/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8651 - dice_coef: 0.8651 - val_loss: -0.8475 - val_dice_coef: 0.8475
Epoch 161/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8644 - dice_coef: 0.8644 - val_loss: -0.8518 - val_dice_coef: 0.8518
Epoch 162/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8699 - dice_coef: 0.8699 - val_loss: -0.8468 - val_dice_coef: 0.8468
Epoch 163/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8705 - dice_coef: 0.8705 - val_loss: -0.8534 - val_dice_coef: 0.8534
Epoch 164/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8680 - dice_coef: 0.8680 - val_loss: -0.8415 - val_dice_coef: 0.8415
Epoch 165/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8689 - dice_coef: 0.8689 - val_loss: -0.8522 - val_dice_coef: 0.8522
Epoch 166/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8710 - dice_coef: 0.8710 - val_loss: -0.8532 - val_dice_coef: 0.8532
Epoch 167/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8705 - dice_coef: 0.8705 - val_loss: -0.8401 - val_dice_coef: 0.8401
Epoch 168/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8667 - dice_coef: 0.8667 - val_loss: -0.8526 - val_dice_coef: 0.8526
Epoch 169/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8702 - dice_coef: 0.8702 - val_loss: -0.8443 - val_dice_coef: 0.8443
Epoch 170/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8715 - dice_coef: 0.8715 - val_loss: -0.8504 - val_dice_coef: 0.8504
Epoch 171/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8670 - dice_coef: 0.8670 - val_loss: -0.8549 - val_dice_coef: 0.8549
Epoch 172/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8691 - dice_coef: 0.8691 - val_loss: -0.8519 - val_dice_coef: 0.8519
Epoch 173/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8691 - dice_coef: 0.8691 - val_loss: -0.8486 - val_dice_coef: 0.8486
Epoch 174/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8723 - dice_coef: 0.8723 - val_loss: -0.8547 - val_dice_coef: 0.8547
Epoch 175/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8695 - dice_coef: 0.8695 - val_loss: -0.8416 - val_dice_coef: 0.8416
Epoch 176/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8712 - dice_coef: 0.8712 - val_loss: -0.8531 - val_dice_coef: 0.8531
Epoch 177/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8708 - dice_coef: 0.8708 - val_loss: -0.8563 - val_dice_coef: 0.8563
Epoch 178/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8608 - dice_coef: 0.8608 - val_loss: -0.8505 - val_dice_coef: 0.8505
Epoch 179/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8649 - dice_coef: 0.8649 - val_loss: -0.8527 - val_dice_coef: 0.8527
Epoch 180/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8730 - dice_coef: 0.8730 - val_loss: -0.8410 - val_dice_coef: 0.8410
Epoch 181/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8713 - dice_coef: 0.8713 - val_loss: -0.8436 - val_dice_coef: 0.8436
Epoch 182/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8702 - dice_coef: 0.8702 - val_loss: -0.8446 - val_dice_coef: 0.8446
Epoch 183/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8722 - dice_coef: 0.8722 - val_loss: -0.8537 - val_dice_coef: 0.8537
Epoch 184/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8717 - dice_coef: 0.8717 - val_loss: -0.8540 - val_dice_coef: 0.8540
Epoch 185/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8719 - dice_coef: 0.8719 - val_loss: -0.8410 - val_dice_coef: 0.8410
Epoch 186/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8643 - dice_coef: 0.8643 - val_loss: -0.8530 - val_dice_coef: 0.8530
Epoch 187/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8752 - dice_coef: 0.8752 - val_loss: -0.8510 - val_dice_coef: 0.8510
Epoch 188/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8678 - dice_coef: 0.8678 - val_loss: -0.8563 - val_dice_coef: 0.8563
Epoch 189/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8722 - dice_coef: 0.8722 - val_loss: -0.8518 - val_dice_coef: 0.8518
Epoch 190/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8725 - dice_coef: 0.8725 - val_loss: -0.8576 - val_dice_coef: 0.8576
Epoch 191/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8768 - dice_coef: 0.8768 - val_loss: -0.8502 - val_dice_coef: 0.8502
Epoch 192/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8772 - dice_coef: 0.8772 - val_loss: -0.8551 - val_dice_coef: 0.8551
Epoch 193/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8737 - dice_coef: 0.8737 - val_loss: -0.8486 - val_dice_coef: 0.8486
Epoch 194/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8606 - dice_coef: 0.8606 - val_loss: -0.8321 - val_dice_coef: 0.8321
Epoch 195/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8718 - dice_coef: 0.8718 - val_loss: -0.8358 - val_dice_coef: 0.8358
Epoch 196/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8760 - dice_coef: 0.8760 - val_loss: -0.8537 - val_dice_coef: 0.8537
Epoch 197/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8745 - dice_coef: 0.8745 - val_loss: -0.8553 - val_dice_coef: 0.8553
Epoch 198/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8752 - dice_coef: 0.8752 - val_loss: -0.8587 - val_dice_coef: 0.8587
Epoch 199/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8754 - dice_coef: 0.8754 - val_loss: -0.8576 - val_dice_coef: 0.8576
Epoch 200/200
1452/1452 [==============================] - 54s 37ms/step - loss: -0.8762 - dice_coef: 0.8762 - val_loss: -0.8589 - val_dice_coef: 0.8589
In [10]:
    # Save the Keras training history so it can be re-plotted later
    # (see the SHOW_HISTORY cell).
    # BUG FIX: the original ended with `f.close` (no parentheses), which
    # merely evaluates the bound method and never closes the file — the
    # cell output `<function BufferedWriter.close>` shows this. A context
    # manager guarantees the handle is flushed and closed.
    with open(dname_checkpoints + '/' + fname_history, 'wb') as f:
        pickle.dump(history.history, f)
Out[10]:
<function BufferedWriter.close>
In [14]:
#
#  TEST MODE
#
mode = 'TEST'
if mode == "TEST":
    # Prediction (test) mode

    # Load the trained weights (checkpoint of the final training epoch)
    # into model_fcn01 before running inference.
    epoch = 200
    fname_weights = 'model_weights_%02d.h5'%(epoch)
    fpath_weights = os.path.join(dname_checkpoints, fname_weights)
    model_fcn01.load_weights(fpath_weights)
    print('==> done')
==> done
In [15]:
    # Read Test Data: resolve the test image / mask paths, load the
    # images as an array, and standardize each color channel with the
    # per-channel statistics computed at training time.
    fnames = load_fnames('data/list_test_01.txt')

    fpaths_xs_test, fpaths_ys_test = make_fnames(fnames, 'data/img', 'data/mask', 'OperatorA_')

    X_test = load_imgs_asarray(fpaths_xs_test, grayscale=False,
                               target_size=target_size, dim_ordering=dim_ordering)
    # Ground-truth masks (fpaths_ys_test) are opened later with PIL for
    # evaluation, so Y_test is not materialized here.

    # Load the mean / standard deviation computed during training.
    print('loading mean and standard deviation from ' + fname_stats + '...')
    stats = np.load(dname_checkpoints + '/' + fname_stats)
    mean = stats['mean']
    std = stats['std']
    print('==> mean: ' + str(mean))
    print('==> std : ' + str(std))

    # Standardize each of the three color channels in place
    # (channel axis is assumed at position 1 — TODO confirm dim_ordering).
    for ch, (m, s) in enumerate(zip(mean, std)):
        X_test[:, ch] = (X_test[:, ch] - m) / s
    print('==> done')
loading mean and standard deviation from stats01.npz...
==> mean: [130.65465  91.2685   76.63643]
==> std : [55.2817   43.990963 43.113483]
==> done
In [16]:
    # Run inference: predict segmentation masks for the whole test set.
    outputs = model_fcn01.predict(X_test)
#    outputs = model_fcn02.predict(X_test)
    
In [17]:
    # Save every predicted mask as a numbered PNG image.
    dname_outputs = './outputs/'
    if not os.path.isdir(dname_outputs):
        print('create directory: %s'%(dname_outputs))
        os.mkdir(dname_outputs)

    print('saving outputs as images...')
    # FIX: removed the redundant manual counter `n` — `enumerate` already
    # yields the running index `i`, and the two were always equal, so the
    # saved filenames ("%05d.png") are unchanged.
    for i, array in enumerate(outputs):
        array = np.where(array > 0.5, 1, 0)  # binarize the soft mask at 0.5
        array = array.astype(np.float32)     # array_to_img expects float data
        img_out = array_to_img(array, dim_ordering)
        fpath_out = os.path.join(dname_outputs, "%05d.png"%(i))
        img_out.save(fpath_out)

    print('==> done')
saving outputs as images...
==> done
In [21]:
    from PIL import Image
    import matplotlib.pyplot as plt

    dice_eval = []

    # FIX: removed the redundant counter `n` (always equal to the loop
    # index `i`), compute the Dice score once per image instead of twice,
    # and guard the 0/0 case when both masks are empty.
    for i in range(len(fpaths_xs_test)):
        # Input test image.
        im1 = Image.open(fpaths_xs_test[i]).resize((320, 240))
        # Predicted mask saved by the previous cell (same index order).
        im2 = Image.open(os.path.join(dname_outputs, "%05d.png"%(i))).resize((320, 240))
        # Ground truth mask.
        im3 = Image.open(fpaths_ys_test[i]).resize((320, 240))

        # Overlay for display: red = prediction, green = ground truth
        # (overlap therefore renders yellow).
        im2_d = np.zeros((240, 320, 3), 'uint8')
        im2_d[:, :, 0] = np.array(im2)
        im2_d[:, :, 1] = np.array(im3)*255
        im2_d[:, :, 2] = 0

        # Binarize both masks, then Dice = 2|A∩B| / (|A| + |B|).
        im2a = np.array(im2)
        im2a[im2a > 0] = 1
        im3a = np.array(im3)
        im3a[im3a > 0] = 1

        denom = np.sum(im2a) + np.sum(im3a)
        # Empty prediction AND empty ground truth count as a perfect match
        # (dice = 1.0) rather than dividing by zero.
        dice = 2.0*np.sum(im2a * im3a)/denom if denom > 0 else 1.0
        print('%03d: Dice Coeff = %f'%(i, dice))
        print('%f'%img_dice_coeff(im2,im3))  # sanity cross-check vs. library impl
        dice_eval.append(dice)

        plt.imshow(np.hstack((np.array(im1),np.array(im2_d))))
        plt.show()

    print('Dice eval av. : %f'%np.mean(np.array(dice_eval)))
000: Dice Coeff = 0.899851
0.899851
001: Dice Coeff = 0.738682
0.738682
002: Dice Coeff = 0.869748
0.869748
003: Dice Coeff = 0.820842
0.820842
004: Dice Coeff = 0.729443
0.729443
005: Dice Coeff = 0.766404
0.766404
006: Dice Coeff = 0.766885
0.766885
007: Dice Coeff = 0.752830
0.752830
008: Dice Coeff = 0.870311
0.870311
009: Dice Coeff = 0.859688
0.859688
010: Dice Coeff = 0.926719
0.926719
011: Dice Coeff = 0.555901
0.555901
012: Dice Coeff = 0.818182
0.818182
013: Dice Coeff = 0.960317
0.960317
014: Dice Coeff = 0.635920
0.635920
015: Dice Coeff = 0.865707
0.865707
016: Dice Coeff = 0.874627
0.874627
017: Dice Coeff = 0.921438
0.921438
018: Dice Coeff = 0.899204
0.899204
019: Dice Coeff = 0.903614
0.903614
020: Dice Coeff = 0.876738
0.876738
021: Dice Coeff = 0.789474
0.789474
022: Dice Coeff = 0.882250
0.882250
023: Dice Coeff = 0.968654
0.968654
024: Dice Coeff = 0.958386
0.958386
025: Dice Coeff = 0.909297
0.909297
026: Dice Coeff = 0.944810
0.944810
027: Dice Coeff = 0.905051
0.905051
028: Dice Coeff = 0.932961
0.932961
029: Dice Coeff = 0.844391
0.844391
030: Dice Coeff = 0.933333
0.933333
031: Dice Coeff = 0.956618
0.956618
032: Dice Coeff = 0.894091
0.894091
033: Dice Coeff = 0.804598
0.804598
034: Dice Coeff = 0.942209
0.942209
035: Dice Coeff = 0.890715
0.890715
036: Dice Coeff = 0.874743
0.874743
037: Dice Coeff = 0.845369
0.845369
038: Dice Coeff = 0.751445
0.751445
039: Dice Coeff = 0.852564
0.852564
040: Dice Coeff = 0.873700
0.873700
041: Dice Coeff = 0.942714
0.942714
042: Dice Coeff = 0.926499
0.926499
043: Dice Coeff = 0.841584
0.841584
044: Dice Coeff = 0.887439
0.887439
045: Dice Coeff = 0.861518
0.861518
046: Dice Coeff = 0.924242
0.924242
047: Dice Coeff = 0.958018
0.958018
048: Dice Coeff = 0.893004
0.893004
049: Dice Coeff = 0.790607
0.790607
050: Dice Coeff = 0.888889
0.888889
051: Dice Coeff = 0.855377
0.855377
052: Dice Coeff = 0.939457
0.939457
053: Dice Coeff = 0.934579
0.934579
054: Dice Coeff = 0.925307
0.925307
055: Dice Coeff = 0.756303
0.756303
056: Dice Coeff = 0.915646
0.915646
057: Dice Coeff = 0.866477
0.866477
058: Dice Coeff = 0.923864
0.923864
059: Dice Coeff = 0.937557
0.937557
060: Dice Coeff = 0.869707
0.869707
061: Dice Coeff = 0.891599
0.891599
062: Dice Coeff = 0.882658
0.882658
063: Dice Coeff = 0.852941
0.852941
064: Dice Coeff = 0.772016
0.772016
065: Dice Coeff = 0.883146
0.883146
066: Dice Coeff = 0.777969
0.777969
067: Dice Coeff = 0.877657
0.877657
068: Dice Coeff = 0.890835
0.890835
069: Dice Coeff = 0.656906
0.656906
070: Dice Coeff = 0.847571
0.847571
071: Dice Coeff = 0.886680
0.886680
072: Dice Coeff = 0.879808
0.879808
073: Dice Coeff = 0.919774
0.919774
074: Dice Coeff = 0.883249
0.883249
075: Dice Coeff = 0.883227
0.883227
076: Dice Coeff = 0.756129
0.756129
077: Dice Coeff = 0.776952
0.776952
078: Dice Coeff = 0.723455
0.723455
079: Dice Coeff = 0.859091
0.859091
080: Dice Coeff = 0.799847
0.799847
081: Dice Coeff = 0.664356
0.664356
082: Dice Coeff = 0.594912
0.594912
083: Dice Coeff = 0.871935
0.871935
084: Dice Coeff = 0.805556
0.805556
085: Dice Coeff = 0.825137
0.825137
086: Dice Coeff = 0.765543
0.765543
087: Dice Coeff = 0.671551
0.671551
088: Dice Coeff = 0.910369
0.910369
089: Dice Coeff = 0.857790
0.857790
090: Dice Coeff = 0.822238
0.822238
091: Dice Coeff = 0.907486
0.907486
092: Dice Coeff = 0.930616
0.930616
093: Dice Coeff = 0.809422
0.809422
094: Dice Coeff = 0.919271
0.919271
095: Dice Coeff = 0.619883
0.619883
096: Dice Coeff = 0.788000
0.788000
097: Dice Coeff = 0.865072
0.865072
098: Dice Coeff = 0.825737
0.825737
099: Dice Coeff = 0.795556
0.795556
100: Dice Coeff = 0.784409
0.784409
101: Dice Coeff = 0.698639
0.698639
102: Dice Coeff = 0.607725
0.607725
103: Dice Coeff = 0.802834
0.802834
104: Dice Coeff = 0.651828
0.651828
105: Dice Coeff = 0.747145
0.747145
106: Dice Coeff = 0.665789
0.665789
107: Dice Coeff = 0.838870
0.838870
108: Dice Coeff = 0.751890
0.751890
109: Dice Coeff = 0.667461
0.667461
110: Dice Coeff = 0.823529
0.823529
111: Dice Coeff = 0.896209
0.896209
112: Dice Coeff = 0.756225
0.756225
113: Dice Coeff = 0.931507
0.931507
114: Dice Coeff = 0.905356
0.905356
115: Dice Coeff = 0.559767
0.559767
116: Dice Coeff = 0.899866
0.899866
117: Dice Coeff = 0.890667
0.890667
118: Dice Coeff = 0.883295
0.883295
119: Dice Coeff = 0.944444
0.944444
120: Dice Coeff = 0.887139
0.887139
121: Dice Coeff = 0.796380
0.796380
122: Dice Coeff = 0.890019
0.890019
123: Dice Coeff = 0.879855
0.879855
124: Dice Coeff = 0.856378
0.856378
125: Dice Coeff = 0.817043
0.817043
126: Dice Coeff = 0.729483
0.729483
127: Dice Coeff = 0.921273
0.921273
128: Dice Coeff = 0.640100
0.640100
129: Dice Coeff = 0.840090
0.840090
130: Dice Coeff = 0.863362
0.863362
131: Dice Coeff = 0.898907
0.898907
132: Dice Coeff = 0.917836
0.917836
133: Dice Coeff = 0.874610
0.874610
134: Dice Coeff = 0.783547
0.783547
135: Dice Coeff = 0.844195
0.844195
136: Dice Coeff = 0.899041
0.899041
137: Dice Coeff = 0.080560
0.080560
138: Dice Coeff = 0.946648
0.946648
139: Dice Coeff = 0.876344
0.876344
140: Dice Coeff = 0.911796
0.911796
141: Dice Coeff = 0.456825
0.456825
142: Dice Coeff = 0.739935
0.739935
143: Dice Coeff = 0.872727
0.872727
144: Dice Coeff = 0.883632
0.883632
145: Dice Coeff = 0.875385
0.875385
146: Dice Coeff = 0.849244
0.849244
147: Dice Coeff = 0.927921
0.927921
148: Dice Coeff = 0.823117
0.823117
149: Dice Coeff = 0.830075
0.830075
150: Dice Coeff = 0.808717
0.808717
151: Dice Coeff = 0.762510
0.762510
152: Dice Coeff = 0.823529
0.823529
153: Dice Coeff = 0.624733
0.624733
154: Dice Coeff = 0.869369
0.869369
155: Dice Coeff = 0.775606
0.775606
156: Dice Coeff = 0.815890
0.815890
157: Dice Coeff = 0.648199
0.648199
158: Dice Coeff = 0.916843
0.916843
159: Dice Coeff = 0.723898
0.723898
160: Dice Coeff = 0.788413
0.788413
161: Dice Coeff = 0.740883
0.740883
162: Dice Coeff = 0.879640
0.879640
163: Dice Coeff = 0.857143
0.857143
164: Dice Coeff = 0.772983
0.772983
165: Dice Coeff = 0.609808
0.609808
166: Dice Coeff = 0.841823
0.841823
167: Dice Coeff = 0.828135
0.828135
168: Dice Coeff = 0.757489
0.757489
169: Dice Coeff = 0.848546
0.848546
170: Dice Coeff = 0.669903
0.669903
171: Dice Coeff = 0.778281
0.778281
172: Dice Coeff = 0.780220
0.780220
173: Dice Coeff = 0.854902
0.854902
174: Dice Coeff = 0.905547
0.905547
175: Dice Coeff = 0.856525
0.856525
176: Dice Coeff = 0.748428
0.748428
177: Dice Coeff = 0.894376
0.894376
178: Dice Coeff = 0.796584
0.796584
179: Dice Coeff = 0.806826
0.806826
180: Dice Coeff = 0.807773
0.807773
181: Dice Coeff = 0.865385
0.865385
182: Dice Coeff = 0.736739
0.736739
183: Dice Coeff = 0.535211
0.535211
184: Dice Coeff = 0.782145
0.782145
185: Dice Coeff = 0.839339
0.839339
186: Dice Coeff = 0.720000
0.720000
187: Dice Coeff = 0.696219
0.696219
188: Dice Coeff = 0.833834
0.833834
189: Dice Coeff = 0.768212
0.768212
190: Dice Coeff = 0.698669
0.698669
191: Dice Coeff = 0.805643
0.805643
192: Dice Coeff = 0.350140
0.350140
193: Dice Coeff = 0.785789
0.785789
194: Dice Coeff = 0.336066
0.336066
195: Dice Coeff = 0.717557
0.717557
196: Dice Coeff = 0.838095
0.838095
197: Dice Coeff = 0.779968
0.779968
198: Dice Coeff = 0.843900
0.843900
199: Dice Coeff = 0.854421
0.854421
200: Dice Coeff = 0.857510
0.857510
201: Dice Coeff = 0.818452
0.818452
202: Dice Coeff = 0.657277
0.657277
203: Dice Coeff = 0.829305
0.829305
204: Dice Coeff = 0.772000
0.772000
205: Dice Coeff = 0.904239
0.904239
206: Dice Coeff = 0.875817
0.875817
207: Dice Coeff = 0.931891
0.931891
208: Dice Coeff = 0.802521
0.802521
209: Dice Coeff = 0.898026
0.898026
210: Dice Coeff = 0.909520
0.909520
211: Dice Coeff = 0.942316
0.942316
212: Dice Coeff = 0.757692
0.757692
213: Dice Coeff = 0.798403
0.798403
214: Dice Coeff = 0.837675
0.837675
215: Dice Coeff = 0.933468
0.933468
216: Dice Coeff = 0.720497
0.720497
217: Dice Coeff = 0.804401
0.804401
218: Dice Coeff = 0.821951
0.821951
219: Dice Coeff = 0.905035
0.905035
220: Dice Coeff = 0.850649
0.850649
221: Dice Coeff = 0.898236
0.898236
222: Dice Coeff = 0.781145
0.781145
223: Dice Coeff = 0.870990
0.870990
224: Dice Coeff = 0.767947
0.767947
225: Dice Coeff = 0.845390
0.845390
226: Dice Coeff = 0.757372
0.757372
227: Dice Coeff = 0.867446
0.867446
228: Dice Coeff = 0.744257
0.744257
229: Dice Coeff = 0.848580
0.848580
230: Dice Coeff = 0.869425
0.869425
231: Dice Coeff = 0.942931
0.942931
232: Dice Coeff = 0.694387
0.694387
233: Dice Coeff = 0.807080
0.807080
234: Dice Coeff = 0.866787
0.866787
235: Dice Coeff = 0.927265
0.927265
236: Dice Coeff = 0.710480
0.710480
237: Dice Coeff = 0.622500
0.622500
238: Dice Coeff = 0.867442
0.867442
239: Dice Coeff = 0.897426
0.897426
240: Dice Coeff = 0.855754
0.855754
241: Dice Coeff = 0.587838
0.587838
242: Dice Coeff = 0.639437
0.639437
243: Dice Coeff = 0.860733
0.860733
244: Dice Coeff = 0.885167
0.885167
245: Dice Coeff = 0.934150
0.934150
246: Dice Coeff = 0.884163
0.884163
247: Dice Coeff = 0.900148
0.900148
248: Dice Coeff = 0.880597
0.880597
249: Dice Coeff = 0.833333
0.833333
250: Dice Coeff = 0.811944
0.811944
251: Dice Coeff = 0.834050
0.834050
252: Dice Coeff = 0.823308
0.823308
253: Dice Coeff = 0.772932
0.772932
254: Dice Coeff = 0.797896
0.797896
255: Dice Coeff = 0.692191
0.692191
256: Dice Coeff = 0.801782
0.801782
257: Dice Coeff = 0.910638
0.910638
258: Dice Coeff = 0.642100
0.642100
259: Dice Coeff = 0.867635
0.867635
260: Dice Coeff = 0.560113
0.560113
261: Dice Coeff = 0.658397
0.658397
262: Dice Coeff = 0.760504
0.760504
263: Dice Coeff = 0.772215
0.772215
264: Dice Coeff = 0.517572
0.517572
265: Dice Coeff = 0.831606
0.831606
266: Dice Coeff = 0.660652
0.660652
267: Dice Coeff = 0.758245
0.758245
268: Dice Coeff = 0.360577
0.360577
269: Dice Coeff = 0.896346
0.896346
Dice eval av. : 0.813215
In [22]:
#
#   Show History
#
mode = "SHOW_HISTORY"
if mode == "SHOW_HISTORY":
    # Reload the pickled training history and plot each recorded metric
    # (loss, dice_coef, val_loss, val_dice_coef) over the epochs.
    fpath_history = dname_checkpoints + '/' + fname_history
    print(fpath_history)
    # FIX: the original passed an anonymous open() straight to
    # pickle.load, leaking the file handle; use a context manager so
    # the file is closed deterministically.
    with open(fpath_history, 'rb') as f:
        history = pickle.load(f)

    for k in history.keys():
        plt.plot(history[k])
        plt.title(k)
        plt.show()
checkpoints_fcn01/history.pkl